In [1]:
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
from iuvs import scaling
from iuvs import io
In [2]:
def get_ordered_arrays(xsize=40, ysize=50):
    spec = 2*np.arange(xsize*ysize).reshape(xsize, ysize)
    dark = np.arange(xsize*ysize).reshape(xsize, ysize)
    return spec, dark
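As a quick sanity check (a sketch added here, not part of the original run): the ordered test arrays are built so that spec is exactly twice dark, so a purely multiplicative fit should recover a scale factor of 2.
In [ ]:
# Sketch: spec = 2*dark by construction, so the fitted slope should come out as 2.
spec_chk, dark_chk = get_ordered_arrays()
popt, _ = curve_fit(lambda x, a: a*x, dark_chk.ravel(), spec_chk.ravel())
popt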
In [42]:
l1b = io.some_l1b()
In [43]:
spec, dark = l1b.get_light_and_dark(-1)
In [15]:
spec, dark = get_ordered_arrays()
In [44]:
xslice, yslice = io.find_scaling_window(spec)
In [77]:
target = spec[xslice,yslice].ravel()
start = dark[xslice,yslice].ravel()
In [72]:
%matplotlib inline
In [59]:
import seaborn as sns
sns.set_style('white')
In [94]:
plt.rcParams['image.interpolation'] = 'none'
In [95]:
l1b.plot_dark_spectogram(-1)
Out[95]:
Different conditions (like different temperatures maybe?) are supported by the method of least absolute deviation.
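The least-absolute-deviation idea can be sketched directly with scipy.optimize.minimize and an L1 cost; this is an illustrative stand-in, not the fitting code used by iuvs.scaling.
In [ ]:
from scipy.optimize import minimize

def lad_mult_fit(x, y):
    """Fit y ~ a*x by minimizing the sum of absolute residuals (L1 norm)."""
    cost = lambda a: np.abs(y - a[0]*x).sum()
    return minimize(cost, x0=[1.0], method='Nelder-Mead').x[0]

lad_mult_fit(start, target)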
In [104]:
target_rescaled = (target - target.min())/(target.max() - target.min())
In [105]:
target_rescaled.std()
Out[105]:
In [106]:
start_rescaled = (start - start.min())/(start.max() - start.min())
In [107]:
start_rescaled.std()
Out[107]:
In [108]:
plt.scatter(start_rescaled, target_rescaled)
Out[108]:
In [110]:
target_standard = (target - target.mean())/target.std()
In [111]:
start_standard = (start - start.mean())/start.std()
In [114]:
plt.scatter(start_standard, target_standard)
Out[114]:
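Standardizing both axes removes offset and scale, so the scatter above essentially shows the correlation between the dark window and the light window; a quick numeric check (sketch):
In [ ]:
# Pearson correlation between the dark window and the light window (sketch).
np.corrcoef(start, target)[0, 1]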
In [109]:
plt.scatter(start, target)
Out[109]:
In [80]:
def multmodel(x, a):
    return a*x

def addmodel(x, a):
    return a+x

target_mean = spec[xslice, yslice].mean()
start_mean = dark[xslice, yslice].mean()
result = curve_fit(multmodel,
                   dark[xslice, yslice].ravel(),
                   spec[xslice, yslice].ravel(),
                   full_output=True)
print('target/start:', target_mean/start_mean)
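For the multiplicative model y = a*x the least-squares slope has the closed form sum(x*y)/sum(x*x), which generally differs from the simple ratio of the means printed above; a quick comparison (sketch):
In [ ]:
# Sketch: compare the closed-form least-squares slope with curve_fit and the mean ratio.
x = dark[xslice, yslice].ravel()
y = spec[xslice, yslice].ravel()
print('closed form:', (x*y).sum()/(x*x).sum())
print('curve_fit  :', result[0][0])
print('mean ratio :', y.sum()/x.sum())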
In [85]:
sns.set_context('talk')
In [86]:
plt.scatter(start, target)
plt.plot(start, result[0]*start, label='fit')
plt.plot(start, target_mean/start_mean*start, label='math')
plt.legend(loc='best')
Out[86]:
In [91]:
plt.scatter(start, result[2]['fvec'])
Out[91]:
In [87]:
result
Out[87]:
In [88]:
from scipy.optimize import leastsq
In [89]:
leastsq?
In [35]:
result[:2]
Out[35]:
In [40]:
s_sq = (result[2]['fvec']**2).sum()/(len(result[2]['fvec'])-len(result[0]))
s_sq
Out[40]:
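The residual variance computed above is what curve_fit uses (by default, when no sigma is given) to scale the covariance matrix it returns as result[1]; the standard error of the fitted scale factor then comes from its diagonal (sketch):
In [ ]:
# Sketch: 1-sigma uncertainty of the fitted scale factor from the covariance matrix.
perr = np.sqrt(np.diag(result[1]))
perr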
In [38]:
result[2]
Out[38]:
In [178]:
def multmodel(x, a):
    return a*x

def addmodel(x, a):
    return a+x
In [179]:
n = 100
dark = np.ones(n)
noise = np.random.rand(n)*0.1*dark
ddark = dark + noise                  # noisy flat dark
signal = addmodel(ddark, 1.5)         # true relation is additive: signal = ddark + 1.5
noise = np.random.rand(n)*0.1*signal
dsignal = signal + noise              # noisy signal

multscaler = scaling.MultScaler(ddark, dsignal)
multscaler.do_fit()
addscaler = scaling.AddScaler(ddark, dsignal)
addscaler.do_fit()

# compare the fitted parameters with the naive estimates
print('mean difference:', dsignal.mean() - ddark.mean())
print('fitted additive offset:', addscaler.p)
print('mean ratio:', dsignal.mean()/ddark.mean())

plt.scatter(ddark, dsignal)
plt.plot(ddark, multscaler.p*ddark, label='MULT')
plt.plot(ddark, addscaler.p+ddark, label='ADD')
plt.legend(loc='best')

# mean residual of each model
print((dsignal - ddark*multscaler.p).mean())
print((dsignal - (ddark+addscaler.p)).mean())
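As a cross-check on the two scalers (a sketch with plain curve_fit rather than iuvs.scaling), fitting both models to the synthetic data should favour the additive one, since dsignal was generated as ddark + 1.5 plus noise:
In [ ]:
# Sketch: fit both models with curve_fit and compare the residual scatter.
popt_mult, _ = curve_fit(multmodel, ddark, dsignal)
popt_add, _ = curve_fit(addmodel, ddark, dsignal)
print('mult a =', popt_mult[0], 'resid std:', (dsignal - multmodel(ddark, *popt_mult)).std())
print('add  a =', popt_add[0], 'resid std:', (dsignal - addmodel(ddark, *popt_add)).std())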
In [189]:
for i, fname in enumerate(io.l1b_filenames()):
    l1b = io.L1BReader(fname)
    spec, dark = l1b.get_light_and_dark(-1)
    xslice, yslice = io.find_scaling_window(spec)
    target = spec[xslice, yslice]
    start = dark[xslice, yslice]
    addscaler = scaling.AddScaler(start, target)
    addscaler.do_fit()
    multscaler = scaling.MultScaler(start, target)
    multscaler.do_fit()
    fig, ax = plt.subplots(nrows=1)
    ax.scatter(start.ravel(), target.ravel())
    ax.plot(start.ravel(), multscaler.scaled.ravel(), 'r', label='Multi')
    ax.plot(start.ravel(), addscaler.scaled.ravel(), 'g', label='Add')
    ax.legend(loc='best')
    ax.set_title('Multi: Resid. mean: {:.3f}, Add: Resid. mean: {:.3f}'
                 .format(multscaler.residual.mean(), addscaler.residual.mean()))
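To compare the two models across all files rather than eyeballing each plot, the per-file residual means could be collected into a small summary (a sketch reusing the same calls as the loop above):
In [ ]:
# Sketch: summarize the mean residual of each model per file.
summary = []
for fname in io.l1b_filenames():
    l1b = io.L1BReader(fname)
    spec, dark = l1b.get_light_and_dark(-1)
    xslice, yslice = io.find_scaling_window(spec)
    addscaler = scaling.AddScaler(dark[xslice, yslice], spec[xslice, yslice])
    addscaler.do_fit()
    multscaler = scaling.MultScaler(dark[xslice, yslice], spec[xslice, yslice])
    multscaler.do_fit()
    summary.append((fname, multscaler.residual.mean(), addscaler.residual.mean()))
summary[:5]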